         if ( test_and_clear_bit(_PGC_allocated, &page->count_info) )
             put_page(page);
+#ifdef DOMAIN_DESTRUCT_AVOID_RECURSION
+        /*
+         * Forcibly drop reference counts of page tables above the top-most
+         * (which were skipped to prevent long latencies due to deep
+         * recursion - see the special treatment in free_lX_table()).
+         */
+        y = page->u.inuse.type_info;
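+        /*
+         * The check below matches pages typed one level above this pass's
+         * 'type' (adding PGT_type_mask decrements the type field by one
+         * level) with PGT_validated clear, i.e. tables already invalidated
+         * whose remaining type references were never dropped because
+         * free_lX_table() returned early.
+         */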
+        if ( (type < PGT_root_page_table) &&
+             unlikely(((y + PGT_type_mask) &
+                       (PGT_type_mask|PGT_validated)) == type) )
+        {
+            BUG_ON((y & PGT_count_mask) >=
+                   (page->count_info & PGC_count_mask));
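+            /*
+             * Drop the stale type references one at a time. Each
+             * put_page_and_type() also drops a general reference, so the
+             * BUG_ON above ensures the general count stays positive
+             * throughout.
+             */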
+            while ( y & PGT_count_mask )
+            {
+                put_page_and_type(page);
+                y = page->u.inuse.type_info;
+            }
+        }
+#endif
+
         /*
          * Forcibly invalidate top-most, still valid page tables at this point
          * to break circular 'linear page table' references. This is okay
         /* fallthrough */
     case RELMEM_done:
+#ifdef DOMAIN_DESTRUCT_AVOID_RECURSION
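+        /*
+         * Extra pass over d->page_list at L1 level: the forcible dropping
+         * of stale type references in relinquish_memory() then covers the
+         * invalidated L2 tables left behind by the skipped recursion in
+         * free_l3_table().
+         */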
+        ret = relinquish_memory(d, &d->page_list, PGT_l1_page_table);
+        if ( ret )
+            return ret;
+#endif
         break;
     default:
     l3_pgentry_t *pl3e;
     int i;
+#ifdef DOMAIN_DESTRUCT_AVOID_RECURSION
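+    /*
+     * Skip the recursion into L2 tables while the domain is being torn
+     * down; the references left behind are dropped by the extra
+     * relinquish_memory() pass in domain_relinquish_resources().
+     */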
+    if ( d->arch.relmem == RELMEM_dom_l3 )
+        return;
+#endif
+
     pl3e = map_domain_page(pfn);
     for ( i = 0; i < L3_PAGETABLE_ENTRIES; i++ )
     l4_pgentry_t *pl4e = page_to_virt(page);
     int i;
+#ifdef DOMAIN_DESTRUCT_AVOID_RECURSION
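+    /*
+     * Likewise, skip the recursion into L3 tables during teardown; see
+     * free_l3_table().
+     */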
+    if ( d->arch.relmem == RELMEM_dom_l4 )
+        return;
+#endif
+
     for ( i = 0; i < L4_PAGETABLE_ENTRIES; i++ )
         if ( is_guest_l4_slot(d, i) )
             put_page_from_l4e(pl4e[i], pfn);
 #define CONFIG_HOTPLUG 1
 #define CONFIG_HOTPLUG_CPU 1
+/*
+ * Avoid deep recursion when tearing down page tables during domain
+ * destruction, which otherwise causes dom0 to become unresponsive and Xen
+ * to miss time-critical softirq deadlines. This will ultimately be replaced
+ * by built-in preemptibility of get_page_type().
+ */
+#define DOMAIN_DESTRUCT_AVOID_RECURSION 1
+
 #define HZ 100
 #define OPT_CONSOLE_STR "vga"